Loading the data directly into Radiant
if (file.exists("data/cf_demo.rds")) {
r_data[["cf_demo_wrk"]] <- readr::read_rds("data/cf_demo.rds")
register("cf_demo_wrk")
r_data[["ratings0_wrk"]] <- readr::read_rds("data/ratings0.rds")
register("ratings0_wrk")
r_data[["ratings50_wrk"]] <- readr::read_rds("data/ratings50.rds")
register("ratings50_wrk")
r_data[["ratings80_wrk"]] <- readr::read_rds("data/ratings80.rds")
register("ratings80_wrk")
r_data[["ulratings_wrk"]] <- readr::read_rds("data/ulratings.rds")
register("ulratings_wrk")
} else {
stop("Are you using the Rstudio project folder for 'crs'?\\nIt should say 'Project: crs' at the top-right of your screen", call. = FALSE)
}
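The code above requires an active Radiant session (r_data and register()). If you prefer to work in plain R, the same files can be read into regular data frames; a minimal sketch, assuming the 'crs' project folder is the working directory:
library(readr)
# read the case data into plain data frames (outside of Radiant)
cf_demo <- read_rds("data/cf_demo.rds")
ratings0 <- read_rds("data/ratings0.rds")
ratings50 <- read_rds("data/ratings50.rds")
ratings80 <- read_rds("data/ratings80.rds")
ulratings <- read_rds("data/ulratings.rds")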
We will start with the cf_demo_wrk data. See the dataset description in Data > Manage. The results are equivalent to what you will see in the cf_demo.xlsx file. Note the data filter used to estimate the model on the training data (i.e., users U1-U10) and predict for user U11.
result <- crs(
dataset = "cf_demo_wrk",
id = "users",
prod = "movies",
pred = c("M6", "M7", "M8", "M9", "M10"),
rate = "ratings",
data_filter = "training == 1"
)
summary(result)
Collaborative filtering
Data : cf_demo_wrk
Filter : training == 1
User id : users
Product id : movies
Predict for: M6, M7, M8, M9, M10
Recommendations:
users product average cf avg_rank cf_rank
U11 M6 3.30 4.10 3 1
U11 M7 2.70 2.08 5 4
U11 M8 3.50 1.70 2 5
U11 M9 2.90 2.13 4 3
U11 M10 4.10 2.71 1 2
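To make explicit what crs() is doing, below is a minimal sketch of standard user-based collaborative filtering: ratings are mean-centered and weighted by the correlations between the new user and each training user. The helper cf_predict and the matrix layout are illustrative assumptions only; the cf_demo.xlsx file shows the exact calculations behind the numbers above.
# sketch of user-based collaborative filtering on a ratings matrix
# rmat: training users in rows, movies in columns
# new_user: the new user's ratings (NA for the movies to predict)
# items: names of the movies to predict
cf_predict <- function(rmat, new_user, items) {
  # correlation between the new user and each training user,
  # based on the movies they both rated
  cors <- apply(rmat, 1, function(u) cor(u, new_user, use = "pairwise.complete.obs"))
  # deviations of each training user's ratings from their own average
  centered <- sweep(rmat[, items, drop = FALSE], 1, rowMeans(rmat, na.rm = TRUE))
  # prediction = new user's average + correlation-weighted average deviation
  mean(new_user, na.rm = TRUE) +
    colSums(centered * cors, na.rm = TRUE) / sum(abs(cors), na.rm = TRUE)
}
# hypothetical usage: cf_predict(rmat, u11, items = paste0("M", 6:10))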
For users in the hold-out sample we have ratings on movies 14-25:
result <- pivotr(
dataset = "ratings0_wrk",
cvars = "rating",
data_filter = "training == 0 & product %in% paste0('mov', 14:25)",
nr = 5
)
plot(result, custom = TRUE) +
labs(title = "Ratings for users 61-100 on movies 14-25")
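The same frequency table can be produced outside the pivot table, for example with dplyr. A sketch using the column names in ratings0_wrk (user, product, rating, training):
library(dplyr)
# count hold-out ratings on movies 14-25, equivalent to the pivot table above
readr::read_rds("data/ratings0.rds") %>%
  filter(training == 0, product %in% paste0("mov", 14:25)) %>%
  count(rating)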
result <- crs(
dataset = "ratings0_wrk",
id = "user",
prod = "product",
pred = "mov14:mov25",
rate = "rating",
data_filter = "training == 1"
)
summary(result)
Collaborative filtering
Data : ratings0_wrk
Filter : training == 1
User id : user
Product id : product
Predict for: mov14, mov15, mov16, mov17, mov18, mov19, mov20, mov21, mov22, mov23, mov24, mov25
Rows shown : 36 out of 480
Summary:
- Average rating picks the best product 30.0% of the time
- Collaborative filtering picks the best product 60.0% of the time
- Pick based on average rating is in the top 3 products 47.5% of the time
- Pick based on collaborative filtering is in the top 3 products 80.0% of the time
- Top 3 based on average ratings contains the best product 60.0% of the time
- Top 3 based on collaborative filtering contains the best product 80.0% of the time
Recommendations:
user product rating average cf ranking avg_rank cf_rank
61 mov14 4 3.55 1.89 1 1 4
61 mov15 2 2.78 2.38 6 10 3
61 mov16 3 3.02 1.71 2 6 6
61 mov17 3 2.60 1.78 2 12 5
61 mov18 2 3.15 2.50 6 4 1
61 mov19 1 2.67 1.14 11 11 12
61 mov20 3 3.05 1.47 2 5 10
61 mov21 2 2.92 1.50 6 9 9
61 mov22 2 3.20 1.61 6 3 8
61 mov23 1 3.38 1.66 11 2 7
61 mov24 2 2.97 2.39 6 7 2
61 mov25 3 2.93 1.46 2 8 11
62 mov14 2 3.55 2.60 6 1 9
62 mov15 4 2.78 2.67 2 10 8
62 mov16 5 3.02 4.63 1 6 1
62 mov17 4 2.60 4.16 2 12 4
62 mov18 1 3.15 1.87 10 4 12
62 mov19 1 2.67 3.25 10 11 7
62 mov20 3 3.05 4.44 4 5 2
62 mov21 3 2.92 4.25 4 9 3
62 mov22 2 3.20 3.27 6 3 6
62 mov23 1 3.38 2.09 10 2 11
62 mov24 2 2.97 2.28 6 7 10
62 mov25 2 2.93 3.57 6 8 5
63 mov14 2 3.55 2.59 7 1 10
63 mov15 2 2.78 3.01 7 10 7
63 mov16 4 3.02 4.33 3 6 1
63 mov17 5 2.60 3.98 1 12 3
63 mov18 2 3.15 2.25 7 4 11
63 mov19 1 2.67 2.77 11 11 8
63 mov20 5 3.05 4.00 1 5 2
63 mov21 3 2.92 3.88 4 9 4
63 mov22 3 3.20 3.05 4 3 6
63 mov23 1 3.38 1.96 11 2 12
63 mov24 2 2.97 2.64 7 7 9
63 mov25 3 2.93 3.25 4 8 5
plot(result)
result <- crs(
dataset = "ratings50_wrk",
id = "user",
prod = "product",
pred = "mov14:mov25",
rate = "rating",
data_filter = "training == 1"
)
plot(result)
result <- crs(
dataset = "ratings80_wrk",
id = "user",
prod = "product",
pred = "mov14:mov25",
rate = "rating",
data_filter = "training == 1"
)
plot(result)
result <- regress(
dataset = "ulratings_wrk",
rvar = "rating",
evar = c("talk", "sex", "action", "story"),
data_filter = "training == 1"
)
summary(result)
Linear regression (OLS)
Data : ulratings_wrk
Filter : training == 1
Response variable : rating
Explanatory variables: talk, sex, action, story
Null hyp.: the effect of x on rating is zero
Alt. hyp.: the effect of x on rating is not zero
coefficient std.error t.value p.value
(Intercept) 3.362 0.151 22.223 < .001 ***
talk -0.032 0.014 -2.308 0.021 *
sex -0.050 0.020 -2.534 0.011 *
action 0.100 0.024 4.105 < .001 ***
story -0.091 0.024 -3.738 < .001 ***
Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
R-squared: 0.048, Adjusted R-squared: 0.046
F-statistic: 16.485 df(4,1295), p.value < .001
Nr obs: 1,300
pred <- predict(result, pred_data = "ulratings_wrk")
print(pred, n = 10)
Linear regression (OLS)
Data : ulratings_wrk
Filter : training == 1
Response variable : rating
Explanatory variables: talk, sex, action, story
Prediction dataset : ulratings_wrk
Rows shown : 10 of 2,500
talk sex action story Prediction 2.5% 97.5% +/-
1 7 9 9 3.063 0.361 5.764 2.701
9 5 6 6 2.882 0.181 5.583 2.701
1 4 10 7 3.492 0.791 6.194 2.701
10 6 1 2 2.666 -0.038 5.369 2.704
3 7 5 5 2.964 0.263 5.665 2.701
7 9 8 7 2.855 0.155 5.556 2.700
9 8 8 4 3.114 0.407 5.820 2.706
2 10 10 9 2.981 0.279 5.684 2.702
6 1 7 4 3.457 0.753 6.161 2.704
5 7 9 10 2.845 0.144 5.547 2.702
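The predictions follow directly from the coefficients reported above. For the first row (talk = 1, sex = 7, action = 9, story = 9) the fitted value can be reproduced by hand; the small difference from the reported 3.063 is due to rounding of the coefficients:
# reproduce the first prediction from the (rounded) coefficients above
3.362 - 0.032 * 1 - 0.050 * 7 + 0.100 * 9 - 0.091 * 9
# [1] 3.061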
store(pred, data = "ulratings_wrk", name = "aggregate")
Predictions based only on ratings from user 1 on movies 1-13 (note the filter).
result <- regress(
dataset = "ulratings_wrk",
rvar = "rating",
evar = c("talk", "sex", "action", "story"),
data_filter = "user == 1 & training == 1"
)
summary(result)
Linear regression (OLS)
Data : ulratings_wrk
Filter : user == 1 & training == 1
Response variable : rating
Explanatory variables: talk, sex, action, story
Null hyp.: the effect of x on rating is zero
Alt. hyp.: the effect of x on rating is not zero
coefficient std.error t.value p.value
(Intercept) 2.246 0.371 6.055 < .001 ***
talk 0.412 0.034 12.243 < .001 ***
sex -0.158 0.048 -3.281 0.011 *
action -0.104 0.059 -1.745 0.119
story 0.028 0.059 0.469 0.651
Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
R-squared: 0.969, Adjusted R-squared: 0.954
F-statistic: 63.344 df(4,8), p.value < .001
Nr obs: 13
pred <- predict(result, pred_data = "ulratings_wrk")
print(pred, n = 10)
Linear regression (OLS)
Data : ulratings_wrk
Filter : user == 1 & training == 1
Response variable : rating
Explanatory variables: talk, sex, action, story
Prediction dataset : ulratings_wrk
Rows shown : 10 of 2,500
talk sex action story Prediction 2.5% 97.5% +/-
1 7 9 9 0.871 -0.016 1.758 0.887
9 5 6 6 4.706 3.827 5.585 0.879
1 4 10 7 1.184 0.291 2.078 0.894
10 6 1 2 5.367 4.421 6.313 0.946
3 7 5 5 1.997 1.121 2.874 0.876
7 9 8 7 3.073 2.204 3.941 0.869
9 8 8 4 3.970 2.960 4.979 1.010
2 10 10 9 0.706 -0.213 1.625 0.919
6 1 7 4 3.943 2.987 4.898 0.956
5 7 9 10 2.545 1.647 3.443 0.898
store(pred, data = "ulratings_wrk", name = "user1")
Generate predictions for each user separately (note the filter). We could use a loop to do this, but we can actually get predictions for all users in a single model by using interaction terms; a sketch of the loop alternative is shown below for reference. Hint: you might need something like this for Pentathlon III.
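The loop-based alternative might look something like the sketch below, fitting one lm per user with dplyr/tidyr/purrr rather than radiant's regress function (the column names are those in ulratings_wrk; customized_loop is an illustrative name). The interaction model that follows estimates the same per-user coefficients in a single call.
library(dplyr)
library(tidyr)
library(purrr)
ulratings <- readr::read_rds("data/ulratings.rds")
# fit one regression per user on the training movies, then predict
# every movie for that user with their own model
customized_loop <- ulratings %>%
  nest(data = -user) %>%
  mutate(
    model = map(data, ~ lm(rating ~ talk + sex + action + story,
                           data = filter(.x, training == 1))),
    pred = map2(model, data, predict)
  ) %>%
  unnest(c(data, pred))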
result <- regress(
dataset = "ulratings_wrk",
rvar = "rating",
evar = c("user", "talk", "sex", "action", "story"),
int = c("user:talk", "user:sex", "user:action", "user:story"),
data_filter = "training == 1"
)
summary(result)
Linear regression (OLS)
Data : ulratings_wrk
Filter : training == 1
Response variable : rating
Explanatory variables: user, talk, sex, action, story
Null hyp.: the effect of x on rating is zero
Alt. hyp.: the effect of x on rating is not zero
coefficient std.error t.value p.value
(Intercept) 2.246 0.694 3.235 0.001 **
user|2 0.889 0.982 0.906 0.365
user|3 1.056 0.982 1.075 0.283
user|4 0.232 0.982 0.236 0.813
user|5 2.105 0.982 2.144 0.032 *
user|6 -0.319 0.982 -0.325 0.746
user|7 0.654 0.982 0.666 0.505
user|8 0.687 0.982 0.700 0.484
user|9 1.241 0.982 1.264 0.207
user|10 0.650 0.982 0.662 0.508
user|11 1.270 0.982 1.293 0.196
user|12 1.849 0.982 1.883 0.060 .
user|13 2.072 0.982 2.110 0.035 *
user|14 1.280 0.982 1.304 0.193
user|15 0.782 0.982 0.797 0.426
user|16 0.596 0.982 0.607 0.544
user|17 0.998 0.982 1.017 0.310
user|18 1.148 0.982 1.170 0.243
user|19 1.817 0.982 1.851 0.065 .
user|20 1.225 0.982 1.247 0.213
user|21 -0.526 0.982 -0.536 0.592
user|22 0.984 0.982 1.002 0.317
user|23 0.241 0.982 0.246 0.806
user|24 0.780 0.982 0.795 0.427
user|25 -0.074 0.982 -0.075 0.940
user|26 1.278 0.982 1.302 0.193
user|27 1.041 0.982 1.061 0.289
user|28 0.997 0.982 1.016 0.310
user|29 2.499 0.982 2.545 0.011 *
user|30 -0.241 0.982 -0.246 0.806
user|31 1.052 0.982 1.072 0.284
user|32 1.708 0.982 1.740 0.082 .
user|33 1.058 0.982 1.078 0.281
user|34 1.450 0.982 1.477 0.140
user|35 0.734 0.982 0.748 0.455
user|36 -0.158 0.982 -0.161 0.872
user|37 0.815 0.982 0.831 0.406
user|38 2.005 0.982 2.042 0.041 *
user|39 2.807 0.982 2.859 0.004 **
user|40 1.558 0.982 1.587 0.113
user|41 0.840 0.982 0.855 0.393
user|42 1.221 0.982 1.243 0.214
user|43 1.566 0.982 1.595 0.111
user|44 2.225 0.982 2.266 0.024 *
user|45 1.015 0.982 1.034 0.302
user|46 1.658 0.982 1.689 0.092 .
user|47 0.980 0.982 0.998 0.319
user|48 0.634 0.982 0.646 0.518
user|49 0.490 0.982 0.499 0.618
user|50 1.580 0.982 1.610 0.108
user|51 0.813 0.982 0.828 0.408
user|52 2.734 0.982 2.785 0.005 **
user|53 1.304 0.982 1.329 0.184
user|54 1.141 0.982 1.162 0.246
user|55 1.986 0.982 2.023 0.043 *
user|56 1.507 0.982 1.535 0.125
user|57 0.567 0.982 0.577 0.564
user|58 1.780 0.982 1.813 0.070 .
user|59 0.444 0.982 0.452 0.651
user|60 1.581 0.982 1.611 0.108
user|61 0.044 0.982 0.045 0.964
user|62 1.606 0.982 1.636 0.102
user|63 1.316 0.982 1.340 0.181
user|64 1.932 0.982 1.968 0.049 *
user|65 -0.276 0.982 -0.282 0.778
user|66 1.069 0.982 1.088 0.277
user|67 1.039 0.982 1.059 0.290
user|68 1.526 0.982 1.554 0.120
user|69 -0.358 0.982 -0.365 0.715
user|70 1.526 0.982 1.554 0.120
user|71 -0.697 0.982 -0.710 0.478
user|72 1.986 0.982 2.023 0.043 *
user|73 0.342 0.982 0.348 0.728
user|74 0.050 0.982 0.050 0.960
user|75 0.629 0.982 0.640 0.522
user|76 1.306 0.982 1.331 0.184
user|77 1.042 0.982 1.062 0.289
user|78 1.322 0.982 1.346 0.179
user|79 1.790 0.982 1.823 0.069 .
user|80 1.594 0.982 1.624 0.105
user|81 0.381 0.982 0.388 0.698
user|82 1.751 0.982 1.784 0.075 .
user|83 1.901 0.982 1.936 0.053 .
user|84 1.351 0.982 1.377 0.169
user|85 1.918 0.982 1.953 0.051 .
user|86 0.349 0.982 0.355 0.723
user|87 0.387 0.982 0.394 0.694
user|88 2.672 0.982 2.722 0.007 **
user|89 2.289 0.982 2.331 0.020 *
user|90 1.265 0.982 1.289 0.198
user|91 0.900 0.982 0.917 0.359
user|92 1.732 0.982 1.765 0.078 .
user|93 1.932 0.982 1.968 0.049 *
user|94 0.664 0.982 0.676 0.499
user|95 -0.457 0.982 -0.466 0.641
user|96 1.382 0.982 1.408 0.160
user|97 1.059 0.982 1.079 0.281
user|98 0.818 0.982 0.833 0.405
user|99 1.699 0.982 1.731 0.084 .
user|100 2.557 0.982 2.605 0.009 **
talk 0.412 0.063 6.541 < .001 ***
sex -0.158 0.090 -1.753 0.080 .
action -0.104 0.111 -0.933 0.351
story 0.028 0.111 0.251 0.802
user|2:talk -0.149 0.089 -1.669 0.096 .
user|3:talk -0.035 0.089 -0.392 0.695
user|4:talk -0.061 0.089 -0.681 0.496
user|5:talk -0.222 0.089 -2.496 0.013 *
user|6:talk 0.064 0.089 0.724 0.469
user|7:talk -0.054 0.089 -0.606 0.544
user|8:talk -0.182 0.089 -2.048 0.041 *
user|9:talk -0.050 0.089 -0.566 0.572
user|10:talk -0.105 0.089 -1.182 0.238
user|11:talk -0.135 0.089 -1.519 0.129
user|12:talk -0.152 0.089 -1.704 0.089 .
user|13:talk -0.218 0.089 -2.454 0.014 *
user|14:talk -0.198 0.089 -2.220 0.027 *
user|15:talk -0.086 0.089 -0.967 0.334
user|16:talk -0.023 0.089 -0.260 0.795
user|17:talk -0.100 0.089 -1.126 0.260
user|18:talk -0.217 0.089 -2.435 0.015 *
user|19:talk -0.155 0.089 -1.745 0.081 .
user|20:talk -0.285 0.089 -3.209 0.001 **
user|21:talk 0.068 0.089 0.767 0.444
user|22:talk -0.054 0.089 -0.604 0.546
user|23:talk -0.121 0.089 -1.360 0.174
user|24:talk -0.020 0.089 -0.228 0.820
user|25:talk 0.017 0.089 0.194 0.846
user|26:talk -0.623 0.089 -7.004 < .001 ***
user|27:talk -0.630 0.089 -7.079 < .001 ***
user|28:talk -0.526 0.089 -5.910 < .001 ***
user|29:talk -0.851 0.089 -9.559 < .001 ***
user|30:talk -0.470 0.089 -5.283 < .001 ***
user|31:talk -0.647 0.089 -7.270 < .001 ***
user|32:talk -0.750 0.089 -8.426 < .001 ***
user|33:talk -0.619 0.089 -6.956 < .001 ***
user|34:talk -0.679 0.089 -7.626 < .001 ***
user|35:talk -0.522 0.089 -5.870 < .001 ***
user|36:talk -0.435 0.089 -4.888 < .001 ***
user|37:talk -0.634 0.089 -7.125 < .001 ***
user|38:talk -0.766 0.089 -8.615 < .001 ***
user|39:talk -0.695 0.089 -7.810 < .001 ***
user|40:talk -0.766 0.089 -8.605 < .001 ***
user|41:talk -0.584 0.089 -6.569 < .001 ***
user|42:talk -0.669 0.089 -7.517 < .001 ***
user|43:talk -0.673 0.089 -7.562 < .001 ***
user|44:talk -0.745 0.089 -8.374 < .001 ***
user|45:talk -0.659 0.089 -7.407 < .001 ***
user|46:talk -0.682 0.089 -7.669 < .001 ***
user|47:talk -0.636 0.089 -7.149 < .001 ***
user|48:talk -0.601 0.089 -6.754 < .001 ***
user|49:talk -0.593 0.089 -6.663 < .001 ***
user|50:talk -0.655 0.089 -7.357 < .001 ***
user|51:talk -0.588 0.089 -6.613 < .001 ***
user|52:talk -0.615 0.089 -6.916 < .001 ***
user|53:talk -0.497 0.089 -5.584 < .001 ***
user|54:talk -0.545 0.089 -6.121 < .001 ***
user|55:talk -0.660 0.089 -7.413 < .001 ***
user|56:talk -0.773 0.089 -8.683 < .001 ***
user|57:talk -0.656 0.089 -7.378 < .001 ***
user|58:talk -0.570 0.089 -6.404 < .001 ***
user|59:talk -0.428 0.089 -4.810 < .001 ***
user|60:talk -0.606 0.089 -6.816 < .001 ***
user|61:talk -0.567 0.089 -6.373 < .001 ***
user|62:talk -0.529 0.089 -5.942 < .001 ***
user|63:talk -0.531 0.089 -5.973 < .001 ***
user|64:talk -0.721 0.089 -8.104 < .001 ***
user|65:talk -0.584 0.089 -6.560 < .001 ***
user|66:talk -0.670 0.089 -7.528 < .001 ***
user|67:talk -0.655 0.089 -7.357 < .001 ***
user|68:talk -0.697 0.089 -7.835 < .001 ***
user|69:talk -0.495 0.089 -5.569 < .001 ***
user|70:talk -0.625 0.089 -7.024 < .001 ***
user|71:talk -0.520 0.089 -5.846 < .001 ***
user|72:talk -0.554 0.089 -6.229 < .001 ***
user|73:talk -0.537 0.089 -6.034 < .001 ***
user|74:talk -0.618 0.089 -6.949 < .001 ***
user|75:talk -0.605 0.089 -6.804 < .001 ***
user|76:talk -0.559 0.089 -6.288 < .001 ***
user|77:talk -0.453 0.089 -5.096 < .001 ***
user|78:talk -0.458 0.089 -5.147 < .001 ***
user|79:talk -0.445 0.089 -4.999 < .001 ***
user|80:talk -0.465 0.089 -5.221 < .001 ***
user|81:talk -0.354 0.089 -3.977 < .001 ***
user|82:talk -0.437 0.089 -4.915 < .001 ***
user|83:talk -0.452 0.089 -5.080 < .001 ***
user|84:talk -0.490 0.089 -5.504 < .001 ***
user|85:talk -0.486 0.089 -5.467 < .001 ***
user|86:talk -0.311 0.089 -3.491 < .001 ***
user|87:talk -0.364 0.089 -4.086 < .001 ***
user|88:talk -0.426 0.089 -4.784 < .001 ***
user|89:talk -0.497 0.089 -5.591 < .001 ***
user|90:talk -0.365 0.089 -4.100 < .001 ***
user|91:talk -0.408 0.089 -4.583 < .001 ***
user|92:talk -0.448 0.089 -5.037 < .001 ***
user|93:talk -0.492 0.089 -5.535 < .001 ***
user|94:talk -0.422 0.089 -4.739 < .001 ***
user|95:talk -0.341 0.089 -3.835 < .001 ***
user|96:talk -0.454 0.089 -5.108 < .001 ***
user|97:talk -0.406 0.089 -4.560 < .001 ***
user|98:talk -0.415 0.089 -4.664 < .001 ***
user|99:talk -0.415 0.089 -4.660 < .001 ***
user|100:talk -0.529 0.089 -5.946 < .001 ***
user|2:sex 0.003 0.127 0.027 0.979
user|3:sex -0.050 0.127 -0.397 0.692
user|4:sex 0.023 0.127 0.183 0.855
user|5:sex -0.028 0.127 -0.219 0.826
user|6:sex 0.018 0.127 0.143 0.886
user|7:sex 0.043 0.127 0.336 0.737
user|8:sex -0.066 0.127 -0.517 0.605
user|9:sex -0.049 0.127 -0.382 0.703
user|10:sex 0.001 0.127 0.004 0.997
user|11:sex -0.016 0.127 -0.127 0.899
user|12:sex -0.166 0.127 -1.309 0.191
user|13:sex 0.060 0.127 0.471 0.638
user|14:sex -0.012 0.127 -0.094 0.925
user|15:sex -0.125 0.127 -0.981 0.327
user|16:sex -0.099 0.127 -0.779 0.436
user|17:sex -0.058 0.127 -0.453 0.651
user|18:sex 0.113 0.127 0.890 0.374
user|19:sex -0.044 0.127 -0.343 0.732
user|20:sex 0.111 0.127 0.875 0.382
user|21:sex 0.017 0.127 0.137 0.891
user|22:sex 0.066 0.127 0.519 0.604
user|23:sex 0.030 0.127 0.233 0.816
user|24:sex -0.042 0.127 -0.329 0.742
user|25:sex 0.039 0.127 0.309 0.758
user|26:sex 0.029 0.127 0.231 0.817
user|27:sex 0.147 0.127 1.157 0.248
user|28:sex 0.063 0.127 0.499 0.618
user|29:sex 0.236 0.127 1.854 0.064 .
user|30:sex 0.066 0.127 0.521 0.603
user|31:sex 0.161 0.127 1.264 0.207
user|32:sex 0.108 0.127 0.852 0.394
user|33:sex 0.098 0.127 0.774 0.439
user|34:sex 0.125 0.127 0.983 0.326
user|35:sex 0.073 0.127 0.576 0.565
user|36:sex 0.051 0.127 0.404 0.686
user|37:sex -0.024 0.127 -0.188 0.851
user|38:sex 0.068 0.127 0.533 0.594
user|39:sex -0.079 0.127 -0.621 0.535
user|40:sex 0.121 0.127 0.952 0.341
user|41:sex 0.089 0.127 0.700 0.484
user|42:sex 0.004 0.127 0.035 0.972
user|43:sex -0.053 0.127 -0.420 0.674
user|44:sex -0.011 0.127 -0.084 0.933
user|45:sex 0.092 0.127 0.722 0.470
user|46:sex 0.126 0.127 0.989 0.323
user|47:sex 0.068 0.127 0.532 0.595
user|48:sex 0.031 0.127 0.241 0.810
user|49:sex 0.107 0.127 0.841 0.401
user|50:sex 0.185 0.127 1.458 0.145
user|51:sex 0.478 0.127 3.758 < .001 ***
user|52:sex 0.436 0.127 3.427 < .001 ***
user|53:sex 0.346 0.127 2.721 0.007 **
user|54:sex 0.585 0.127 4.602 < .001 ***
user|55:sex 0.551 0.127 4.330 < .001 ***
user|56:sex 0.677 0.127 5.325 < .001 ***
user|57:sex 0.648 0.127 5.096 < .001 ***
user|58:sex 0.482 0.127 3.789 < .001 ***
user|59:sex 0.456 0.127 3.583 < .001 ***
user|60:sex 0.367 0.127 2.890 0.004 **
user|61:sex 0.448 0.127 3.523 < .001 ***
user|62:sex 0.503 0.127 3.958 < .001 ***
user|63:sex 0.475 0.127 3.738 < .001 ***
user|64:sex 0.662 0.127 5.206 < .001 ***
user|65:sex 0.562 0.127 4.421 < .001 ***
user|66:sex 0.526 0.127 4.138 < .001 ***
user|67:sex 0.356 0.127 2.802 0.005 **
user|68:sex 0.597 0.127 4.696 < .001 ***
user|69:sex 0.428 0.127 3.367 < .001 ***
user|70:sex 0.398 0.127 3.126 0.002 **
user|71:sex 0.442 0.127 3.473 < .001 ***
user|72:sex 0.402 0.127 3.161 0.002 **
user|73:sex 0.665 0.127 5.229 < .001 ***
user|74:sex 0.729 0.127 5.729 < .001 ***
user|75:sex 0.502 0.127 3.949 < .001 ***
user|76:sex 0.062 0.127 0.491 0.623
user|77:sex 0.020 0.127 0.155 0.877
user|78:sex -0.179 0.127 -1.406 0.160
user|79:sex -0.163 0.127 -1.280 0.201
user|80:sex -0.367 0.127 -2.889 0.004 **
user|81:sex -0.124 0.127 -0.977 0.329
user|82:sex -0.130 0.127 -1.022 0.307
user|83:sex -0.246 0.127 -1.938 0.053 .
user|84:sex 0.125 0.127 0.985 0.325
user|85:sex -0.219 0.127 -1.726 0.085 .
user|86:sex -0.278 0.127 -2.187 0.029 *
user|87:sex -0.067 0.127 -0.531 0.596
user|88:sex -0.236 0.127 -1.853 0.064 .
user|89:sex -0.357 0.127 -2.806 0.005 **
user|90:sex -0.196 0.127 -1.545 0.123
user|91:sex -0.047 0.127 -0.371 0.711
user|92:sex -0.225 0.127 -1.770 0.077 .
user|93:sex -0.279 0.127 -2.191 0.029 *
user|94:sex -0.108 0.127 -0.846 0.398
user|95:sex -0.023 0.127 -0.180 0.857
user|96:sex -0.280 0.127 -2.199 0.028 *
user|97:sex 0.025 0.127 0.196 0.845
user|98:sex -0.062 0.127 -0.485 0.627
user|99:sex 0.054 0.127 0.426 0.670
user|100:sex -0.277 0.127 -2.177 0.030 *
user|2:action 0.034 0.157 0.218 0.827
user|3:action 0.069 0.157 0.437 0.662
user|4:action -0.101 0.157 -0.641 0.522
user|5:action -0.238 0.157 -1.515 0.130
user|6:action 0.176 0.157 1.115 0.265
user|7:action 0.014 0.157 0.088 0.930
user|8:action -0.109 0.157 -0.695 0.487
user|9:action 0.116 0.157 0.734 0.463
user|10:action -0.022 0.157 -0.138 0.890
user|11:action -0.192 0.157 -1.220 0.223
user|12:action -0.047 0.157 -0.296 0.767
user|13:action -0.091 0.157 -0.580 0.562
user|14:action -0.149 0.157 -0.945 0.345
user|15:action -0.095 0.157 -0.604 0.546
user|16:action 0.102 0.157 0.645 0.519
user|17:action -0.070 0.157 -0.444 0.657
user|18:action -0.075 0.157 -0.479 0.632
user|19:action 0.025 0.157 0.157 0.875
user|20:action -0.221 0.157 -1.403 0.161
user|21:action 0.113 0.157 0.718 0.473
user|22:action 0.122 0.157 0.776 0.438
user|23:action -0.030 0.157 -0.189 0.850
user|24:action 0.104 0.157 0.658 0.511
user|25:action 0.180 0.157 1.146 0.252
user|26:action 0.359 0.157 2.279 0.023 *
user|27:action 0.578 0.157 3.675 < .001 ***
user|28:action 0.503 0.157 3.198 0.001 **
user|29:action 0.256 0.157 1.629 0.104
user|30:action 0.673 0.157 4.274 < .001 ***
user|31:action 0.453 0.157 2.881 0.004 **
user|32:action 0.367 0.157 2.331 0.020 *
user|33:action 0.621 0.157 3.946 < .001 ***
user|34:action 0.427 0.157 2.715 0.007 **
user|35:action 0.430 0.157 2.732 0.006 **
user|36:action 0.672 0.157 4.267 < .001 ***
user|37:action 0.473 0.157 3.006 0.003 **
user|38:action 0.337 0.157 2.139 0.033 *
user|39:action 0.325 0.157 2.068 0.039 *
user|40:action 0.362 0.157 2.303 0.022 *
user|41:action 0.579 0.157 3.678 < .001 ***
user|42:action 0.400 0.157 2.542 0.011 *
user|43:action 0.329 0.157 2.092 0.037 *
user|44:action 0.307 0.157 1.948 0.052 .
user|45:action 0.456 0.157 2.895 0.004 **
user|46:action 0.490 0.157 3.112 0.002 **
user|47:action 0.480 0.157 3.052 0.002 **
user|48:action 0.533 0.157 3.388 < .001 ***
user|49:action 0.541 0.157 3.437 < .001 ***
user|50:action 0.394 0.157 2.504 0.012 *
user|51:action 0.111 0.157 0.704 0.482
user|52:action -0.173 0.157 -1.100 0.272
user|53:action -0.261 0.157 -1.661 0.097 .
user|54:action -0.027 0.157 -0.171 0.864
user|55:action -0.128 0.157 -0.811 0.418
user|56:action -0.173 0.157 -1.096 0.273
user|57:action 0.083 0.157 0.527 0.599
user|58:action -0.005 0.157 -0.035 0.972
user|59:action 0.077 0.157 0.489 0.625
user|60:action -0.065 0.157 -0.414 0.679
user|61:action 0.206 0.157 1.306 0.192
user|62:action -0.079 0.157 -0.501 0.617
user|63:action -0.089 0.157 -0.565 0.572
user|64:action -0.261 0.157 -1.661 0.097 .
user|65:action 0.074 0.157 0.468 0.640
user|66:action 0.040 0.157 0.252 0.801
user|67:action -0.174 0.157 -1.107 0.268
user|68:action -0.118 0.157 -0.748 0.455
user|69:action -0.055 0.157 -0.348 0.728
user|70:action -0.046 0.157 -0.291 0.771
user|71:action 0.105 0.157 0.664 0.507
user|72:action -0.298 0.157 -1.892 0.059 .
user|73:action 0.031 0.157 0.200 0.842
user|74:action -0.057 0.157 -0.363 0.717
user|75:action 0.078 0.157 0.495 0.620
user|76:action 0.464 0.157 2.949 0.003 **
user|77:action 0.508 0.157 3.227 0.001 **
user|78:action 0.235 0.157 1.496 0.135
user|79:action 0.440 0.157 2.795 0.005 **
user|80:action 0.381 0.157 2.424 0.016 *
user|81:action 0.488 0.157 3.100 0.002 **
user|82:action 0.219 0.157 1.391 0.165
user|83:action 0.396 0.157 2.517 0.012 *
user|84:action 0.488 0.157 3.100 0.002 **
user|85:action 0.324 0.157 2.062 0.040 *
user|86:action 0.534 0.157 3.394 < .001 ***
user|87:action 0.536 0.157 3.404 < .001 ***
user|88:action 0.215 0.157 1.367 0.172
user|89:action 0.314 0.157 1.997 0.046 *
user|90:action 0.471 0.157 2.990 0.003 **
user|91:action 0.488 0.157 3.101 0.002 **
user|92:action 0.428 0.157 2.721 0.007 **
user|93:action 0.264 0.157 1.681 0.093 .
user|94:action 0.550 0.157 3.495 < .001 ***
user|95:action 0.713 0.157 4.528 < .001 ***
user|96:action 0.433 0.157 2.749 0.006 **
user|97:action 0.399 0.157 2.538 0.011 *
user|98:action 0.567 0.157 3.601 < .001 ***
user|99:action 0.414 0.157 2.633 0.009 **
user|100:action 0.309 0.157 1.966 0.050 *
user|2:story -0.117 0.157 -0.746 0.456
user|3:story -0.166 0.157 -1.053 0.293
user|4:story 0.053 0.157 0.337 0.736
user|5:story 0.060 0.157 0.379 0.705
user|6:story -0.175 0.157 -1.111 0.267
user|7:story -0.091 0.157 -0.579 0.563
user|8:story 0.113 0.157 0.717 0.474
user|9:story -0.159 0.157 -1.011 0.312
user|10:story -0.070 0.157 -0.446 0.656
user|11:story 0.109 0.157 0.695 0.487
user|12:story -0.029 0.157 -0.186 0.853
user|13:story -0.123 0.157 -0.784 0.433
user|14:story 0.036 0.157 0.229 0.819
user|15:story 0.095 0.157 0.604 0.546
user|16:story -0.137 0.157 -0.868 0.386
user|17:story 0.023 0.157 0.149 0.882
user|18:story -0.129 0.157 -0.820 0.412
user|19:story -0.151 0.157 -0.958 0.339
user|20:story 0.013 0.157 0.080 0.936
user|21:story -0.049 0.157 -0.310 0.757
user|22:story -0.251 0.157 -1.597 0.111
user|23:story -0.029 0.157 -0.186 0.852
user|24:story -0.108 0.157 -0.685 0.494
user|25:story -0.163 0.157 -1.033 0.302
user|26:story 0.023 0.157 0.146 0.884
user|27:story -0.275 0.157 -1.748 0.081 .
user|28:story -0.140 0.157 -0.888 0.375
user|29:story -0.147 0.157 -0.933 0.351
user|30:story -0.198 0.157 -1.257 0.209
user|31:story -0.167 0.157 -1.063 0.288
user|32:story -0.106 0.157 -0.673 0.501
user|33:story -0.332 0.157 -2.107 0.035 *
user|34:story -0.114 0.157 -0.726 0.468
user|35:story -0.144 0.157 -0.914 0.361
user|36:story -0.214 0.157 -1.360 0.174
user|37:story -0.093 0.157 -0.588 0.557
user|38:story -0.080 0.157 -0.510 0.610
user|39:story -0.083 0.157 -0.529 0.597
user|40:story -0.172 0.157 -1.090 0.276
user|41:story -0.247 0.157 -1.569 0.117
user|42:story -0.006 0.157 -0.035 0.972
user|43:story 0.010 0.157 0.062 0.951
user|44:story -0.022 0.157 -0.137 0.891
user|45:story -0.108 0.157 -0.684 0.494
user|46:story -0.240 0.157 -1.527 0.127
user|47:story -0.127 0.157 -0.804 0.422
user|48:story -0.134 0.157 -0.851 0.395
user|49:story -0.147 0.157 -0.936 0.350
user|50:story -0.169 0.157 -1.077 0.282
user|51:story -0.267 0.157 -1.696 0.090 .
user|52:story -0.124 0.157 -0.789 0.431
user|53:story 0.092 0.157 0.585 0.559
user|54:story -0.171 0.157 -1.088 0.277
user|55:story -0.064 0.157 -0.409 0.683
user|56:story -0.066 0.157 -0.416 0.678
user|57:story -0.291 0.157 -1.847 0.065 .
user|58:story -0.227 0.157 -1.440 0.150
user|59:story -0.195 0.157 -1.238 0.216
user|60:story -0.177 0.157 -1.126 0.261
user|61:story -0.342 0.157 -2.170 0.030 *
user|62:story -0.266 0.157 -1.687 0.092 .
user|63:story -0.139 0.157 -0.881 0.379
user|64:story -0.053 0.157 -0.340 0.734
user|65:story -0.199 0.157 -1.263 0.207
user|66:story -0.262 0.157 -1.663 0.097 .
user|67:story 0.098 0.157 0.620 0.536
user|68:story -0.092 0.157 -0.582 0.561
user|69:story -0.052 0.157 -0.333 0.740
user|70:story -0.105 0.157 -0.668 0.504
user|71:story -0.117 0.157 -0.746 0.456
user|72:story 0.017 0.157 0.110 0.912
user|73:story -0.271 0.157 -1.721 0.086 .
user|74:story -0.159 0.157 -1.010 0.313
user|75:story -0.189 0.157 -1.199 0.231
user|76:story -0.289 0.157 -1.836 0.067 .
user|77:story -0.216 0.157 -1.375 0.170
user|78:story 0.019 0.157 0.122 0.903
user|79:story -0.131 0.157 -0.830 0.407
user|80:story -0.032 0.157 -0.202 0.840
user|81:story -0.108 0.157 -0.687 0.492
user|82:story -0.068 0.157 -0.432 0.666
user|83:story -0.074 0.157 -0.473 0.637
user|84:story -0.257 0.157 -1.630 0.104
user|85:story 0.027 0.157 0.169 0.866
user|86:story -0.121 0.157 -0.768 0.443
user|87:story -0.162 0.157 -1.026 0.305
user|88:story 0.081 0.157 0.513 0.608
user|89:story -0.046 0.157 -0.292 0.770
user|90:story -0.237 0.157 -1.502 0.133
user|91:story -0.109 0.157 -0.690 0.490
user|92:story -0.108 0.157 -0.688 0.492
user|93:story -0.021 0.157 -0.132 0.895
user|94:story -0.178 0.157 -1.128 0.260
user|95:story -0.416 0.157 -2.639 0.008 **
user|96:story -0.242 0.157 -1.538 0.124
user|97:story -0.037 0.157 -0.236 0.814
user|98:story -0.324 0.157 -2.058 0.040 *
user|99:story -0.211 0.157 -1.341 0.180
user|100:story -0.103 0.157 -0.653 0.514
Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
R-squared: 0.876, Adjusted R-squared: 0.799
F-statistic: 11.352 df(499,800), p.value < .001
Nr obs: 1,300
pred <- predict(result, pred_data = "ulratings_wrk")
print(pred, n = 10)
Linear regression (OLS)
Data : ulratings_wrk
Filter : training == 1
Response variable : rating
Explanatory variables: user, talk, sex, action, story
Prediction dataset : ulratings_wrk
Rows shown : 10 of 2,500
user talk sex action story Prediction 2.5% 97.5% +/-
1 1 7 9 9 0.871 -0.543 2.285 1.414
1 9 5 6 6 4.706 3.306 6.106 1.400
1 1 4 10 7 1.184 -0.240 2.608 1.424
1 10 6 1 2 5.367 3.860 6.874 1.507
1 3 7 5 5 1.997 0.601 3.394 1.396
1 7 9 8 7 3.073 1.689 4.457 1.384
1 9 8 8 4 3.970 2.362 5.578 1.608
1 2 10 10 9 0.706 -0.759 2.170 1.464
1 6 1 7 4 3.943 2.420 5.465 1.523
1 5 7 9 10 2.545 1.115 3.976 1.431
store(pred, data = "ulratings_wrk", name = "customized")
Note that the point predictions for user 1 are identical to those from the user-1-only regression above: the fully interacted model is equivalent to estimating a separate regression for each user, although the prediction intervals differ because the residual variance is pooled across users. Let's take a look at the Data > View tab after setting the filter to training == 0. Make sure all variables are selected so you can see the difference between aggregate and customized for the different movies.
updateSelectInput(session, "dataset", selected = "ulratings_wrk")
updateCheckboxInput(session, "show_filter", value = TRUE)
updateTextInput(session, "data_filter", value = "training == 0")
updateTabsetPanel(session, "tabs_data", selected = "View")
updateTabsetPanel(session, "nav_radiant", selected = "Data")
Finally, generate predictions for each user separately (note the filter), using only the story information about a movie.
result <- regress(
dataset = "ulratings_wrk",
rvar = "rating",
evar = c("user", "story"),
int = "user:story",
data_filter = "training == 1"
)
summary(result)
Linear regression (OLS)
Data : ulratings_wrk
Filter : training == 1
Response variable : rating
Explanatory variables: user, story
Null hyp.: the effect of x on rating is zero
Alt. hyp.: the effect of x on rating is not zero
coefficient std.error t.value p.value
(Intercept) 3.964 0.760 5.215 < .001 ***
user|2 0.114 1.075 0.106 0.916
user|3 0.897 1.075 0.834 0.404
user|4 -0.335 1.075 -0.311 0.756
user|5 0.072 1.075 0.067 0.947
user|6 0.586 1.075 0.545 0.586
user|7 0.490 1.075 0.456 0.649
user|8 -0.866 1.075 -0.806 0.420
user|9 1.122 1.075 1.044 0.297
user|10 -0.029 1.075 -0.027 0.979
user|11 -0.092 1.075 -0.086 0.932
user|12 0.369 1.075 0.344 0.731
user|13 0.700 1.075 0.651 0.515
user|14 -0.322 1.075 -0.299 0.765
user|15 -0.326 1.075 -0.303 0.762
user|16 0.461 1.075 0.429 0.668
user|17 0.059 1.075 0.055 0.956
user|18 -0.024 1.075 -0.023 0.982
user|19 0.846 1.075 0.787 0.431
user|20 -0.753 1.075 -0.700 0.484
user|21 0.230 1.075 0.214 0.831
user|22 1.177 1.075 1.095 0.274
user|23 -0.473 1.075 -0.440 0.660
user|24 0.825 1.075 0.767 0.443
user|25 0.624 1.075 0.580 0.562
user|26 -1.355 1.075 -1.260 0.208
user|27 -0.714 1.075 -0.664 0.507
user|28 -0.576 1.075 -0.536 0.592
user|29 -1.187 1.075 -1.104 0.270
user|30 -1.020 1.075 -0.949 0.343
user|31 -1.103 1.075 -1.027 0.305
user|32 -1.434 1.075 -1.334 0.183
user|33 -0.651 1.075 -0.605 0.545
user|34 -1.062 1.075 -0.988 0.323
user|35 -0.990 1.075 -0.921 0.357
user|36 -0.773 1.075 -0.719 0.472
user|37 -1.718 1.075 -1.599 0.110
user|38 -1.430 1.075 -1.330 0.184
user|39 -0.638 1.075 -0.593 0.553
user|40 -1.655 1.075 -1.540 0.124
user|41 -0.806 1.075 -0.750 0.454
user|42 -1.639 1.075 -1.525 0.128
user|43 -1.668 1.075 -1.552 0.121
user|44 -1.379 1.075 -1.283 0.200
user|45 -1.397 1.075 -1.299 0.194
user|46 -0.705 1.075 -0.656 0.512
user|47 -1.296 1.075 -1.206 0.228
user|48 -1.392 1.075 -1.295 0.196
user|49 -1.259 1.075 -1.171 0.242
user|50 -0.714 1.075 -0.664 0.507
user|51 -1.052 1.075 -0.978 0.328
user|52 -0.172 1.075 -0.160 0.873
user|53 -1.386 1.075 -1.290 0.197
user|54 -0.542 1.075 -0.504 0.614
user|55 -0.743 1.075 -0.691 0.490
user|56 -1.664 1.075 -1.548 0.122
user|57 -1.307 1.075 -1.216 0.224
user|58 -0.277 1.075 -0.258 0.796
user|59 -0.625 1.075 -0.581 0.561
user|60 -1.168 1.075 -1.087 0.277
user|61 -1.520 1.075 -1.414 0.158
user|62 -0.348 1.075 -0.323 0.746
user|63 -0.759 1.075 -0.706 0.481
user|64 -1.216 1.075 -1.131 0.258
user|65 -1.981 1.075 -1.843 0.066 .
user|66 -1.336 1.075 -1.243 0.214
user|67 -2.320 1.075 -2.159 0.031 *
user|68 -1.270 1.075 -1.182 0.238
user|69 -2.257 1.075 -2.100 0.036 *
user|70 -1.198 1.075 -1.115 0.265
user|71 -2.274 1.075 -2.116 0.035 *
user|72 -0.989 1.075 -0.920 0.358
user|73 -0.918 1.075 -0.854 0.393
user|74 -1.756 1.075 -1.633 0.103
user|75 -1.358 1.075 -1.263 0.207
user|76 -0.575 1.075 -0.535 0.593
user|77 -0.211 1.075 -0.196 0.844
user|78 -1.240 1.075 -1.153 0.249
user|79 -0.098 1.075 -0.091 0.928
user|80 -1.131 1.075 -1.052 0.293
user|81 -0.734 1.075 -0.683 0.495
user|82 -0.599 1.075 -0.557 0.577
user|83 -0.378 1.075 -0.352 0.725
user|84 0.119 1.075 0.111 0.912
user|85 -0.684 1.075 -0.636 0.525
user|86 -0.809 1.075 -0.753 0.452
user|87 -0.500 1.075 -0.465 0.642
user|88 0.091 1.075 0.084 0.933
user|89 -0.783 1.075 -0.728 0.466
user|90 -0.159 1.075 -0.148 0.882
user|91 -0.320 1.075 -0.298 0.766
user|92 -0.378 1.075 -0.352 0.725
user|93 -1.030 1.075 -0.958 0.338
user|94 -0.638 1.075 -0.593 0.553
user|95 -0.612 1.075 -0.569 0.569
user|96 -0.904 1.075 -0.841 0.401
user|97 -0.191 1.075 -0.178 0.859
user|98 -0.273 1.075 -0.254 0.800
user|99 0.517 1.075 0.481 0.630
user|100 -0.494 1.075 -0.460 0.646
story -0.208 0.125 -1.667 0.096 .
user|2:story -0.064 0.176 -0.363 0.717
user|3:story -0.138 0.176 -0.783 0.434
user|4:story 0.005 0.176 0.029 0.977
user|5:story -0.085 0.176 -0.481 0.630
user|6:story -0.052 0.176 -0.294 0.769
user|7:story -0.048 0.176 -0.273 0.785
user|8:story 0.032 0.176 0.184 0.854
user|9:story -0.094 0.176 -0.534 0.593
user|10:story -0.066 0.176 -0.375 0.708
user|11:story -0.011 0.176 -0.065 0.948
user|12:story -0.126 0.176 -0.714 0.476
user|13:story -0.116 0.176 -0.657 0.512
user|14:story -0.040 0.176 -0.228 0.819
user|15:story -0.025 0.176 -0.143 0.887
user|16:story -0.114 0.176 -0.648 0.517
user|17:story -0.040 0.176 -0.224 0.823
user|18:story -0.081 0.176 -0.461 0.645
user|19:story -0.129 0.176 -0.730 0.466
user|20:story -0.032 0.176 -0.179 0.858
user|21:story 0.029 0.176 0.163 0.870
user|22:story -0.119 0.176 -0.673 0.501
user|23:story -0.012 0.176 -0.069 0.945
user|24:story -0.053 0.176 -0.302 0.763
user|25:story -0.016 0.176 -0.090 0.929
user|26:story 0.409 0.176 2.320 0.021 *
user|27:story 0.333 0.176 1.888 0.059 .
user|28:story 0.350 0.176 1.986 0.047 *
user|29:story 0.320 0.176 1.819 0.069 .
user|30:story 0.404 0.176 2.292 0.022 *
user|31:story 0.362 0.176 2.055 0.040 *
user|32:story 0.352 0.176 1.998 0.046 *
user|33:story 0.278 0.176 1.578 0.115
user|34:story 0.383 0.176 2.173 0.030 *
user|35:story 0.298 0.176 1.692 0.091 .
user|36:story 0.372 0.176 2.112 0.035 *
user|37:story 0.348 0.176 1.974 0.049 *
user|38:story 0.337 0.176 1.912 0.056 .
user|39:story 0.233 0.176 1.321 0.187
user|40:story 0.293 0.176 1.664 0.096 .
user|41:story 0.321 0.176 1.823 0.069 .
user|42:story 0.404 0.176 2.296 0.022 *
user|43:story 0.338 0.176 1.921 0.055 .
user|44:story 0.328 0.176 1.859 0.063 .
user|45:story 0.388 0.176 2.202 0.028 *
user|46:story 0.302 0.176 1.717 0.086 .
user|47:story 0.369 0.176 2.096 0.036 *
user|48:story 0.373 0.176 2.116 0.035 *
user|49:story 0.405 0.176 2.300 0.022 *
user|50:story 0.333 0.176 1.888 0.059 .
user|51:story 0.181 0.176 1.028 0.304
user|52:story 0.103 0.176 0.587 0.557
user|53:story 0.186 0.176 1.056 0.291
user|54:story 0.229 0.176 1.301 0.194
user|55:story 0.267 0.176 1.513 0.131
user|56:story 0.323 0.176 1.835 0.067 .
user|57:story 0.243 0.176 1.378 0.168
user|58:story 0.137 0.176 0.779 0.436
user|59:story 0.187 0.176 1.064 0.287
user|60:story 0.088 0.176 0.502 0.616
user|61:story 0.154 0.176 0.873 0.383
user|62:story 0.050 0.176 0.285 0.775
user|63:story 0.155 0.176 0.881 0.379
user|64:story 0.254 0.176 1.444 0.149
user|65:story 0.268 0.176 1.521 0.129
user|66:story 0.177 0.176 1.003 0.316
user|67:story 0.288 0.176 1.635 0.102
user|68:story 0.279 0.176 1.582 0.114
user|69:story 0.233 0.176 1.325 0.185
user|70:story 0.194 0.176 1.101 0.271
user|71:story 0.294 0.176 1.668 0.096 .
user|72:story 0.126 0.176 0.718 0.473
user|73:story 0.213 0.176 1.211 0.226
user|74:story 0.312 0.176 1.770 0.077 .
user|75:story 0.252 0.176 1.431 0.153
user|76:story 0.178 0.176 1.011 0.312
user|77:story 0.239 0.176 1.358 0.175
user|78:story 0.173 0.176 0.983 0.326
user|79:story 0.175 0.176 0.995 0.320
user|80:story 0.124 0.176 0.705 0.481
user|81:story 0.236 0.176 1.342 0.180
user|82:story 0.097 0.176 0.551 0.582
user|83:story 0.156 0.176 0.885 0.376
user|84:story 0.249 0.176 1.415 0.157
user|85:story 0.227 0.176 1.289 0.198
user|86:story 0.165 0.176 0.934 0.351
user|87:story 0.250 0.176 1.419 0.156
user|88:story 0.183 0.176 1.040 0.299
user|89:story 0.074 0.176 0.420 0.675
user|90:story 0.058 0.176 0.330 0.741
user|91:story 0.288 0.176 1.635 0.102
user|92:story 0.156 0.176 0.885 0.376
user|93:story 0.106 0.176 0.599 0.549
user|94:story 0.233 0.176 1.321 0.187
user|95:story 0.142 0.176 0.807 0.420
user|96:story -0.004 0.176 -0.020 0.984
user|97:story 0.335 0.176 1.904 0.057 .
user|98:story 0.122 0.176 0.693 0.488
user|99:story 0.190 0.176 1.077 0.282
user|100:story 0.063 0.176 0.359 0.720
Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
R-squared: 0.289, Adjusted R-squared: 0.161
F-statistic: 2.252 df(199,1100), p.value < .001
Nr obs: 1,300
pred <- predict(result, pred_data = "ulratings_wrk")
print(pred, n = 10)
Linear regression (OLS)
Data : ulratings_wrk
Filter : training == 1
Response variable : rating
Explanatory variables: user, story
Prediction dataset : ulratings_wrk
Rows shown : 10 of 2,500
user story Prediction 2.5% 97.5% +/-
1 9 2.096 -0.674 4.865 2.770
1 6 2.718 0.089 5.347 2.629
1 7 2.511 -0.144 5.165 2.654
1 2 3.549 0.797 6.301 2.752
1 5 2.926 0.300 5.552 2.626
1 7 2.511 -0.144 5.165 2.654
1 4 3.134 0.487 5.780 2.646
1 9 2.096 -0.674 4.865 2.770
1 4 3.134 0.487 5.780 2.646
1 10 1.888 -0.969 4.745 2.857
store(pred, data = "ulratings_wrk", name = "customized_story")
result <- evalreg(
dataset = "ulratings_wrk",
pred = c("aggregate", "user1", "customized", "customized_story"),
rvar = "rating",
train = "Both",
data_filter = "training == 1"
)
summary(result)
Evaluate predictions for regression models
Data : ulratings_wrk
Filter : training == 1
Results for : Both
Predictors : aggregate, user1, customized, customized_story
Response : rating
Type Predictor Rsq RMSE MAE
Training aggregate 0.048 1.372 1.181
Training user1 0.010 2.161 1.705
Training customized 0.876 0.495 0.400
Training customized_story 0.289 1.186 0.990
Validation aggregate 0.045 1.386 1.193
Validation user1 0.011 1.853 1.476
Validation customized 0.682 0.818 0.653
Validation customized_story 0.043 1.472 1.203
plot(result)
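The RMSE and MAE values reported by evalreg can be checked by hand once the prediction columns have been stored in ulratings_wrk. A sketch for the validation sample, assuming the stored columns aggregate, user1, customized, and customized_story created above:
library(dplyr)
# recompute RMSE and MAE for the hold-out (validation) observations
r_data[["ulratings_wrk"]] %>%
  filter(training == 0) %>%
  summarise(across(
    c(aggregate, user1, customized, customized_story),
    list(RMSE = ~ sqrt(mean((rating - .x)^2)),
         MAE = ~ mean(abs(rating - .x)))
  ))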
visualize(
dataset = "ulratings_wrk",
xvar = "aggregate",
yvar = "rating",
type = "scatter",
facet_col = "movie",
check = "line",
data_filter = "training == 0 & movie %in% paste0('mov', 14:25)",
custom = TRUE
) +
geom_segment(aes(x = 1, y = 1, xend = 5, yend = 5), color = "blue", size = .05) +
coord_cartesian(xlim = c(1,5), ylim = c(1,5)) +
labs(
title = "Predictions from aggregate attribute regression",
x = "Predicted ratings",
y = "Actual ratings"
) +
theme(legend.position = "none")
visualize(
dataset = "ulratings_wrk",
xvar = "user1",
yvar = "rating",
type = "scatter",
facet_col = "movie",
check = "line",
data_filter = "training == 0 & movie %in% paste0('mov', 14:25)",
custom = TRUE
) +
geom_segment(aes(x = 1, y = 1, xend = 5, yend = 5), color = "blue", size = .05) +
coord_cartesian(xlim = c(1,5), ylim = c(1,5)) +
labs(
title = "Predictions from user1 attribute regression",
x = "Predicted ratings",
y = "Actual ratings"
) +
theme(legend.position = "none")
visualize(
dataset = "ulratings_wrk",
xvar = "customized",
yvar = "rating",
type = "scatter",
facet_col = "movie",
check = "line",
data_filter = "training == 0 & movie %in% paste0('mov', 14:25)",
custom = TRUE
) +
geom_segment(aes(x = 1, y = 1, xend = 5, yend = 5), color = "blue", size = .05) +
coord_cartesian(xlim = c(1,5), ylim = c(1,5)) +
labs(
title = "Predictions from user-level attribute regressions",
x = "Predicted ratings",
y = "Actual ratings"
) +
theme(legend.position = "none")
visualize(
dataset = "ulratings_wrk",
xvar = "customized_story",
yvar = "rating",
type = "scatter",
facet_col = "movie",
check = "line",
data_filter = "training == 0 & movie %in% paste0('mov', 14:25)",
custom = TRUE
) +
geom_segment(aes(x = 1, y = 1, xend = 5, yend = 5), color = "blue", size = .05) +
coord_cartesian(xlim = c(1,5), ylim = c(1,5)) +
labs(
title = "Predictions from user-level single-attribute regressions (story)",
x = "Predicted ratings",
y = "Actual ratings"
) +
theme(legend.position = "none")